In [1]:
# Pull numpy/matplotlib into the global namespace and render figures inline.
# NOTE(review): %pylab is deprecated in modern IPython in favor of
# `%matplotlib inline` + explicit imports; kept as-is because the cells
# below rely on the names it injects (figure, etc.).
%pylab inline
# Star import is the classy library's intended usage style (load_excel,
# classifiers, plot helpers all come from here).
from classy import *
In [2]:
data=load_excel('data/iris.xls',verbose=True)
In [3]:
plot_feature_combinations(data)
Or choose which features to plot...
In [4]:
plot_feature_combinations(data,['petal length in cm','sepal width in cm','petal width in cm'])
Since we're only interested in visualization here, let's train with all the data, but using just two features.
In [21]:
data_train=extract_features(data,[2,3])
In [22]:
C=NaiveBayes()
In [23]:
# Time the Naive Bayes fit: reset the stopwatch, train, report elapsed time.
timeit(reset=True)
C.fit(data_train.vectors,data_train.targets)
print("Training time: ",timeit())
In [24]:
# Visualize the decision regions in the 2D feature space, plus the
# per-class centers the Naive Bayes model learned.
figure(figsize=(12,8))
plot2D(data_train,C,axis_range=[0,10.5,0,8])
C.plot_centers() # plot the classifier information
In [25]:
C=kNearestNeighbor()
In [26]:
# Time the kNN fit (for kNN this is typically just storing the data).
timeit(reset=True)
C.fit(data_train.vectors,data_train.targets)
print("Training time: ",timeit())
In [27]:
# kNN decision regions over the same axis range as the Naive Bayes plot.
figure(figsize=(12,8))
plot2D(data_train,C,axis_range=[0,10.5,0,8])
In [28]:
C=RCE()
In [29]:
# Time the RCE fit.
timeit(reset=True)
C.fit(data_train.vectors,data_train.targets)
print("Training time: ",timeit())
In [30]:
print("number of centers:",len(C.centers))
In [31]:
# Plot the learned centers on top of the RCE decision regions.
# Note the y-range starts at -1 here (vs 0 above) to leave room below the data.
figure(figsize=(12,8))
C.plot_centers()
plot2D(data_train,C,axis_range=[0,10.5,-1,8])
In [32]:
C=CSC()
In [33]:
# Time the CSC fit.
timeit(reset=True)
C.fit(data_train.vectors,data_train.targets)
print("Training time: ",timeit())
In [34]:
# CSC centers and decision regions.
figure(figsize=(12,8))
C.plot_centers()
plot2D(data_train,C,axis_range=[0,10.5,-1,8])
In [ ]:
C=Perceptron()
In [35]:
# Time the Perceptron fit.
timeit(reset=True)
C.fit(data_train.vectors,data_train.targets)
# Fixed: the original wrapped print's arguments in an extra pair of
# parentheses, printing the tuple repr ('Training time: ', ...) instead of
# the two values — now consistent with the other timing cells above.
print("Training time: ",timeit())
In [36]:
# Perceptron decision regions (expected to be linear boundaries).
figure(figsize=(12,8))
plot2D(data_train,C,axis_range=[0,10.5,-1,8])
In [44]:
C=BackProp(hidden_layer_sizes = [4],max_iter=10000,tol=1e-4)
In [45]:
# Time the BackProp fit (the slowest of the classifiers here).
timeit(reset=True)
C.fit(data_train.vectors,data_train.targets)
# Fixed: the original wrapped print's arguments in an extra pair of
# parentheses, printing the tuple repr ('Training time: ', ...) instead of
# the two values — now consistent with the other timing cells above.
print("Training time: ",timeit())
In [46]:
# BackProp decision regions — nonlinear boundaries from the hidden layer.
figure(figsize=(12,8))
plot2D(data_train,C,axis_range=[0,10.5,-1,8])
In [ ]: